From bee05c8d839f8fe9ad67d3b8b82b64a4f727ac06 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Mon, 12 Apr 2021 01:54:05 +0000 Subject: [PATCH 1/3] CodeGen from PR 13833 in Azure/azure-rest-api-specs datashare readme t2 config (#13833) --- .../azure-mgmt-datashare/MANIFEST.in | 1 + sdk/datashare/azure-mgmt-datashare/_meta.json | 8 + .../azure/mgmt/datashare/_configuration.py | 2 +- .../azure/mgmt/datashare/_metadata.json | 120 ++++ .../azure/mgmt/datashare/_version.py | 3 +- .../mgmt/datashare/aio/_configuration.py | 2 +- .../aio/operations/_accounts_operations.py | 32 +- .../_consumer_invitations_operations.py | 16 +- .../_consumer_source_data_sets_operations.py | 6 +- .../_data_set_mappings_operations.py | 16 +- .../aio/operations/_data_sets_operations.py | 20 +- .../aio/operations/_invitations_operations.py | 16 +- .../datashare/aio/operations/_operations.py | 4 +- ...provider_share_subscriptions_operations.py | 107 ++- .../_share_subscriptions_operations.py | 52 +- .../aio/operations/_shares_operations.py | 28 +- .../_synchronization_settings_operations.py | 22 +- .../aio/operations/_triggers_operations.py | 24 +- .../azure/mgmt/datashare/models/__init__.py | 25 +- .../_data_share_management_client_enums.py | 85 ++- .../azure/mgmt/datashare/models/_models.py | 617 +++++++++++++---- .../mgmt/datashare/models/_models_py3.py | 634 ++++++++++++++---- .../operations/_accounts_operations.py | 32 +- .../_consumer_invitations_operations.py | 16 +- .../_consumer_source_data_sets_operations.py | 6 +- .../_data_set_mappings_operations.py | 16 +- .../operations/_data_sets_operations.py | 20 +- .../operations/_invitations_operations.py | 16 +- .../mgmt/datashare/operations/_operations.py | 4 +- ...provider_share_subscriptions_operations.py | 108 ++- .../_share_subscriptions_operations.py | 52 +- .../operations/_shares_operations.py | 28 +- .../_synchronization_settings_operations.py | 22 +- .../operations/_triggers_operations.py | 24 +- 34 files changed, 1612 insertions(+), 572 
deletions(-) create mode 100644 sdk/datashare/azure-mgmt-datashare/_meta.json create mode 100644 sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_metadata.json diff --git a/sdk/datashare/azure-mgmt-datashare/MANIFEST.in b/sdk/datashare/azure-mgmt-datashare/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- a/sdk/datashare/azure-mgmt-datashare/MANIFEST.in +++ b/sdk/datashare/azure-mgmt-datashare/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git a/sdk/datashare/azure-mgmt-datashare/_meta.json b/sdk/datashare/azure-mgmt-datashare/_meta.json new file mode 100644 index 000000000000..77e7bad2531f --- /dev/null +++ b/sdk/datashare/azure-mgmt-datashare/_meta.json @@ -0,0 +1,8 @@ +{ + "autorest": "3.0.6369", + "use": "@autorest/python@5.6.2", + "commit": "36084f10aacb98d77af2f67c9d57fca2bbe68e97", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/datashare/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.6.2 --version=3.0.6369", + "readme": "specification/datashare/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_configuration.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_configuration.py index b23073ed1746..f37d9a76a294 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_configuration.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_configuration.py @@ -48,7 +48,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id - self.api_version = "2019-11-01" + self.api_version = "2020-09-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 
'mgmt-datashare/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_metadata.json b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_metadata.json new file mode 100644 index 000000000000..0e83432b54a1 --- /dev/null +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_metadata.json @@ -0,0 +1,120 @@ +{ + "chosen_version": "2020-09-01", + "total_api_version_list": ["2020-09-01"], + "client": { + "name": "DataShareManagementClient", + "filename": "_data_share_management_client", + "description": "Creates a Microsoft.DataShare management client.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataShareManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"DataShareManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the 
client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id, # type: str", + "description": "The subscription identifier.", + "docstring_type": "str", + "required": true + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id: str,", + "description": "The subscription identifier.", + "docstring_type": "str", + "required": true + } + }, + "constant": { + }, + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": 
"azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "accounts": "AccountsOperations", + "consumer_invitations": "ConsumerInvitationsOperations", + "data_sets": "DataSetsOperations", + "data_set_mappings": "DataSetMappingsOperations", + "invitations": "InvitationsOperations", + "operations": "Operations", + "shares": "SharesOperations", + "provider_share_subscriptions": "ProviderShareSubscriptionsOperations", + "share_subscriptions": "ShareSubscriptionsOperations", + "consumer_source_data_sets": "ConsumerSourceDataSetsOperations", + "synchronization_settings": "SynchronizationSettingsOperations", + "triggers": "TriggersOperations" + }, + "operation_mixins": { + "sync_imports": "None", + "async_imports": "None", + "operations": { + } + } +} \ No newline at end of file diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py 
b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py index 515f51c112dd..d089401f1756 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py @@ -6,5 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0b1" - +VERSION = "2019-11-01T00:00:00.000Z" diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/_configuration.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/_configuration.py index 0e202f8141fc..40ffa1a365b4 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/_configuration.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/_configuration.py @@ -45,7 +45,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id - self.api_version = "2019-11-01" + self.api_version = "2020-09-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-datashare/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_accounts_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_accounts_operations.py index 618f4e6a1aa8..7418fac88112 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_accounts_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_accounts_operations.py @@ -64,7 +64,7 @@ def list_by_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -106,7 
+106,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -141,7 +141,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -167,7 +167,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Account', pipeline_response) @@ -190,7 +190,7 @@ async def _create_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -221,7 +221,7 @@ async def _create_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -255,8 +255,8 @@ async def begin_create( :type account: 
~azure.mgmt.datashare.models.Account :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Account or the result of cls(response) @@ -320,7 +320,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -346,7 +346,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -375,8 +375,8 @@ async def begin_delete( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response) @@ -455,7 +455,7 @@ async def update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -486,7 +486,7 @@ async def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Account', pipeline_response) @@ -521,7 +521,7 @@ def list_by_resource_group( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -564,7 +564,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, 
response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_invitations_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_invitations_operations.py index 119e50dda931..57397990deae 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_invitations_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_invitations_operations.py @@ -62,7 +62,7 @@ def list_invitations( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -100,7 +100,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -109,7 +109,7 @@ async def get_next(next_link=None): return AsyncItemPaged( get_next, extract_data ) - list_invitations.metadata = {'url': '/providers/Microsoft.DataShare/ListInvitations'} # type: ignore + list_invitations.metadata = {'url': '/providers/Microsoft.DataShare/listInvitations'} # type: ignore async def get( self, @@ -135,7 +135,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -160,7 +160,7 @@ async def 
get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ConsumerInvitation', pipeline_response) @@ -195,7 +195,7 @@ async def reject_invitation( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -224,7 +224,7 @@ async def reject_invitation( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ConsumerInvitation', pipeline_response) @@ -233,4 +233,4 @@ async def reject_invitation( return cls(pipeline_response, deserialized, {}) return deserialized - reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/RejectInvitation'} # type: ignore + reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/rejectInvitation'} # type: ignore diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_source_data_sets_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_source_data_sets_operations.py index d9dc17b5a231..649e098dee9a 100644 --- 
a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_source_data_sets_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_consumer_source_data_sets_operations.py @@ -71,7 +71,7 @@ def list_by_share_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -116,7 +116,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -125,4 +125,4 @@ async def get_next(next_link=None): return AsyncItemPaged( get_next, extract_data ) - list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/ConsumerSourceDataSets'} # type: ignore + list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/consumerSourceDataSets'} # type: ignore diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_set_mappings_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_set_mappings_operations.py index c1fae7705d2c..da14e60e016b 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_set_mappings_operations.py +++ 
b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_set_mappings_operations.py @@ -71,7 +71,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -99,7 +99,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('DataSetMapping', pipeline_response) @@ -145,7 +145,7 @@ async def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -178,7 +178,7 @@ async def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -223,7 +223,7 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -251,7 +251,7 @@ async def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, 
error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -295,7 +295,7 @@ def list_by_share_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -344,7 +344,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_sets_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_sets_operations.py index dcd2198bef35..5081c2602ee2 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_sets_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_data_sets_operations.py @@ -73,7 +73,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -101,7 +101,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = 
self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('DataSet', pipeline_response) @@ -145,7 +145,7 @@ async def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -178,7 +178,7 @@ async def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -206,7 +206,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -234,7 +234,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -264,8 +264,8 @@ async def begin_delete( :type data_set_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -354,7 +354,7 @@ def list_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -403,7 +403,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_invitations_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_invitations_operations.py index 8711d89cd37b..9fcf74312894 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_invitations_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_invitations_operations.py @@ -71,7 +71,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = 
"2020-09-01" accept = "application/json" # Construct URL @@ -99,7 +99,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Invitation', pipeline_response) @@ -143,7 +143,7 @@ async def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -176,7 +176,7 @@ async def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -221,7 +221,7 @@ async def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -249,7 +249,7 @@ async def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -293,7 +293,7 @@ def list_by_share( 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -342,7 +342,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_operations.py index eb0cd4b02de0..ba71b9027376 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_operations.py @@ -59,7 +59,7 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -95,7 +95,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_provider_share_subscriptions_operations.py 
b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_provider_share_subscriptions_operations.py index c8202637053f..6f4bfa458b77 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_provider_share_subscriptions_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_provider_share_subscriptions_operations.py @@ -43,12 +43,90 @@ def __init__(self, client, config, serializer, deserializer) -> None: self._deserialize = deserializer self._config = config + async def adjust( + self, + resource_group_name: str, + account_name: str, + share_name: str, + provider_share_subscription_id: str, + provider_share_subscription: "_models.ProviderShareSubscription", + **kwargs + ) -> "_models.ProviderShareSubscription": + """Adjust the expiration date of a share subscription in a provider share. + + Adjust a share subscription's expiration date in a provider share. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param account_name: The name of the share account. + :type account_name: str + :param share_name: The name of the share. + :type share_name: str + :param provider_share_subscription_id: To locate shareSubscription. + :type provider_share_subscription_id: str + :param provider_share_subscription: The provider share subscription. 
+ :type provider_share_subscription: ~azure.mgmt.datashare.models.ProviderShareSubscription + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ProviderShareSubscription, or the result of cls(response) + :rtype: ~azure.mgmt.datashare.models.ProviderShareSubscription + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ProviderShareSubscription"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-09-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.adjust.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'shareName': self._serialize.url("share_name", share_name, 'str'), + 'providerShareSubscriptionId': self._serialize.url("provider_share_subscription_id", provider_share_subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription') + body_content_kwargs['content'] = body_content + 
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ProviderShareSubscription', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + adjust.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/adjust'} # type: ignore + async def reinstate( self, resource_group_name: str, account_name: str, share_name: str, provider_share_subscription_id: str, + provider_share_subscription: "_models.ProviderShareSubscription", **kwargs ) -> "_models.ProviderShareSubscription": """Reinstate share subscription in a provider share. @@ -63,6 +141,8 @@ async def reinstate( :type share_name: str :param provider_share_subscription_id: To locate shareSubscription. :type provider_share_subscription_id: str + :param provider_share_subscription: The provider share subscription. 
+ :type provider_share_subscription: ~azure.mgmt.datashare.models.ProviderShareSubscription :keyword callable cls: A custom type or function that will be passed the direct response :return: ProviderShareSubscription, or the result of cls(response) :rtype: ~azure.mgmt.datashare.models.ProviderShareSubscription @@ -73,7 +153,8 @@ async def reinstate( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" + content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL @@ -93,15 +174,19 @@ async def reinstate( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - request = self._client.post(url, query_parameters, header_parameters) + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProviderShareSubscription', pipeline_response) @@ -125,7 +210,7 @@ async def _revoke_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } 
error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -153,7 +238,7 @@ async def _revoke_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -190,8 +275,8 @@ async def begin_revoke( :type provider_share_subscription_id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either ProviderShareSubscription or the result of cls(response) @@ -277,7 +362,7 @@ async def get_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -305,7 +390,7 @@ async def get_by_share( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProviderShareSubscription', pipeline_response) @@ -346,7 +431,7 @@ def list_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -391,7 +476,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_share_subscriptions_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_share_subscriptions_operations.py index b456f8667748..77d5d06098c2 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_share_subscriptions_operations.py 
+++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_share_subscriptions_operations.py @@ -56,7 +56,7 @@ async def _cancel_synchronization_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -88,7 +88,7 @@ async def _cancel_synchronization_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -125,8 +125,8 @@ async def begin_cancel_synchronization( :type share_subscription_synchronization: ~azure.mgmt.datashare.models.ShareSubscriptionSynchronization :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response) @@ -211,7 +211,7 @@ def list_source_share_synchronization_settings( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -256,7 +256,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -306,7 +306,7 @@ def list_synchronization_details( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = "application/json" accept = "application/json" @@ -363,7 +363,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -410,7 +410,7 @@ def list_synchronizations( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -459,7 +459,7 @@ async def 
get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -483,7 +483,7 @@ async def _synchronize_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -515,7 +515,7 @@ async def _synchronize_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -528,7 +528,7 @@ async def _synchronize_initial( return cls(pipeline_response, deserialized, {}) return deserialized - _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'} # type: ignore + _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore async def begin_synchronize( self, @@ -552,8 +552,8 @@ async def begin_synchronize( :type synchronize: ~azure.mgmt.datashare.models.Synchronize :keyword callable cls: A custom type or function that will be passed 
the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response) @@ -606,7 +606,7 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'} # type: ignore + begin_synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore async def get( self, @@ -635,7 +635,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -662,7 +662,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShareSubscription', pipeline_response) @@ -703,7 +703,7 @@ async def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -735,7 +735,7 @@ async def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -762,7 +762,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -789,7 +789,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -821,8 +821,8 @@ async def begin_delete( :type share_subscription_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response) @@ -909,7 +909,7 @@ def list_by_account( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -957,7 +957,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_shares_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_shares_operations.py index f4225b83dfac..7cb09c5f00ec 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_shares_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_shares_operations.py @@ -82,7 +82,7 @@ def list_synchronization_details( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + 
api_version = "2020-09-01" content_type = "application/json" accept = "application/json" @@ -139,7 +139,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -186,7 +186,7 @@ def list_synchronizations( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -235,7 +235,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -273,7 +273,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -300,7 +300,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Share', 
pipeline_response) @@ -341,7 +341,7 @@ async def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -373,7 +373,7 @@ async def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -400,7 +400,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -427,7 +427,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -459,8 +459,8 @@ async def begin_delete( :type share_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response) @@ -547,7 +547,7 @@ def list_by_account( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -595,7 +595,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_synchronization_settings_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_synchronization_settings_operations.py index 84ea4631be20..15b4f4038d9f 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_synchronization_settings_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_synchronization_settings_operations.py @@ -73,7 +73,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } 
error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -101,7 +101,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('SynchronizationSetting', pipeline_response) @@ -123,7 +123,7 @@ async def create( ) -> "_models.SynchronizationSetting": """Adds a new synchronization setting to an existing share. - Create or update a synchronizationSetting. + Create a synchronizationSetting. :param resource_group_name: The resource group name. :type resource_group_name: str @@ -145,7 +145,7 @@ async def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -178,7 +178,7 @@ async def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -206,7 +206,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -234,7 +234,7 @@ async def _delete_initial( if 
response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -269,8 +269,8 @@ async def begin_delete( :type synchronization_setting_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response) @@ -356,7 +356,7 @@ def list_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -401,7 +401,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_triggers_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_triggers_operations.py index 36ee6be3a52e..47dd836b245f 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_triggers_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/aio/operations/_triggers_operations.py @@ -73,7 +73,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -101,7 +101,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Trigger', 
pipeline_response) @@ -126,7 +126,7 @@ async def _create_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -159,7 +159,7 @@ async def _create_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -200,8 +200,8 @@ async def begin_create( :type trigger: ~azure.mgmt.datashare.models.Trigger :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either Trigger or the result of cls(response) @@ -271,7 +271,7 @@ async def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -299,7 +299,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -334,8 +334,8 @@ async def begin_delete( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either OperationResponse or the result of cls(response) @@ -421,7 +421,7 @@ def list_by_share_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -466,7 +466,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/__init__.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/__init__.py index 20977bd15279..49f874f969e9 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/__init__.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/__init__.py @@ -70,11 +70,14 @@ from ._models_py3 import SqlDBTableDataSetMapping from ._models_py3 import SqlDWTableDataSet from ._models_py3 import SqlDWTableDataSetMapping + from ._models_py3 import SynapseWorkspaceSqlPoolTableDataSet + from ._models_py3 import SynapseWorkspaceSqlPoolTableDataSetMapping from ._models_py3 import SynchronizationDetails from ._models_py3 import SynchronizationDetailsList from ._models_py3 import SynchronizationSetting from ._models_py3 import SynchronizationSettingList from ._models_py3 import Synchronize + from ._models_py3 import SystemData from ._models_py3 import Trigger from ._models_py3 import TriggerList except (SyntaxError, ImportError): @@ -141,26 +144,35 @@ from ._models import SqlDBTableDataSetMapping # type: ignore from 
._models import SqlDWTableDataSet # type: ignore from ._models import SqlDWTableDataSetMapping # type: ignore + from ._models import SynapseWorkspaceSqlPoolTableDataSet # type: ignore + from ._models import SynapseWorkspaceSqlPoolTableDataSetMapping # type: ignore from ._models import SynchronizationDetails # type: ignore from ._models import SynchronizationDetailsList # type: ignore from ._models import SynchronizationSetting # type: ignore from ._models import SynchronizationSettingList # type: ignore from ._models import Synchronize # type: ignore + from ._models import SystemData # type: ignore from ._models import Trigger # type: ignore from ._models import TriggerList # type: ignore from ._data_share_management_client_enums import ( + CreatedByType, + DataSetKind, + DataSetMappingKind, DataSetMappingStatus, DataSetType, InvitationStatus, - Kind, + LastModifiedByType, OutputType, ProvisioningState, RecurrenceInterval, ShareKind, ShareSubscriptionStatus, + SourceShareSynchronizationSettingKind, Status, SynchronizationMode, + SynchronizationSettingKind, + TriggerKind, TriggerStatus, Type, ) @@ -229,24 +241,33 @@ 'SqlDBTableDataSetMapping', 'SqlDWTableDataSet', 'SqlDWTableDataSetMapping', + 'SynapseWorkspaceSqlPoolTableDataSet', + 'SynapseWorkspaceSqlPoolTableDataSetMapping', 'SynchronizationDetails', 'SynchronizationDetailsList', 'SynchronizationSetting', 'SynchronizationSettingList', 'Synchronize', + 'SystemData', 'Trigger', 'TriggerList', + 'CreatedByType', + 'DataSetKind', + 'DataSetMappingKind', 'DataSetMappingStatus', 'DataSetType', 'InvitationStatus', - 'Kind', + 'LastModifiedByType', 'OutputType', 'ProvisioningState', 'RecurrenceInterval', 'ShareKind', 'ShareSubscriptionStatus', + 'SourceShareSynchronizationSettingKind', 'Status', 'SynchronizationMode', + 'SynchronizationSettingKind', + 'TriggerKind', 'TriggerStatus', 'Type', ] diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_data_share_management_client_enums.py 
b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_data_share_management_client_enums.py index 718f42dc8887..346f1862f57a 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_data_share_management_client_enums.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_data_share_management_client_enums.py @@ -26,15 +26,17 @@ def __getattr__(cls, name): raise AttributeError(name) -class DataSetMappingStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Gets the status of the data set mapping. +class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity that created the resource. """ - OK = "Ok" - BROKEN = "Broken" + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" -class DataSetType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Type of data set +class DataSetKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Kind of data set. """ BLOB = "Blob" @@ -49,18 +51,33 @@ class DataSetType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): KUSTO_DATABASE = "KustoDatabase" SQL_DB_TABLE = "SqlDBTable" SQL_DW_TABLE = "SqlDWTable" + SYNAPSE_WORKSPACE_SQL_POOL_TABLE = "SynapseWorkspaceSqlPoolTable" -class InvitationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The status of the invitation. +class DataSetMappingKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Kind of data set mapping. 
""" - PENDING = "Pending" - ACCEPTED = "Accepted" - REJECTED = "Rejected" - WITHDRAWN = "Withdrawn" + BLOB = "Blob" + CONTAINER = "Container" + BLOB_FOLDER = "BlobFolder" + ADLS_GEN2_FILE_SYSTEM = "AdlsGen2FileSystem" + ADLS_GEN2_FOLDER = "AdlsGen2Folder" + ADLS_GEN2_FILE = "AdlsGen2File" + KUSTO_CLUSTER = "KustoCluster" + KUSTO_DATABASE = "KustoDatabase" + SQL_DB_TABLE = "SqlDBTable" + SQL_DW_TABLE = "SqlDWTable" + SYNAPSE_WORKSPACE_SQL_POOL_TABLE = "SynapseWorkspaceSqlPoolTable" -class Kind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Kind of data set. +class DataSetMappingStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Gets the status of the data set mapping. + """ + + OK = "Ok" + BROKEN = "Broken" + +class DataSetType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of data set """ BLOB = "Blob" @@ -75,7 +92,25 @@ class Kind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): KUSTO_DATABASE = "KustoDatabase" SQL_DB_TABLE = "SqlDBTable" SQL_DW_TABLE = "SqlDWTable" - SCHEDULE_BASED = "ScheduleBased" + SYNAPSE_WORKSPACE_SQL_POOL_TABLE = "SynapseWorkspaceSqlPoolTable" + +class InvitationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of the invitation. + """ + + PENDING = "Pending" + ACCEPTED = "Accepted" + REJECTED = "Rejected" + WITHDRAWN = "Withdrawn" + +class LastModifiedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity that last modified the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" class OutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Type of output file @@ -85,7 +120,7 @@ class OutputType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): PARQUET = "Parquet" class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Provisioning state of the kusto cluster data set. + """Provisioning state of the data set mapping. 
""" SUCCEEDED = "Succeeded" @@ -117,6 +152,12 @@ class ShareSubscriptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum SOURCE_DELETED = "SourceDeleted" REVOKING = "Revoking" +class SourceShareSynchronizationSettingKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Kind of synchronization setting on share. + """ + + SCHEDULE_BASED = "ScheduleBased" + class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Operation state of the long running operation. """ @@ -135,6 +176,18 @@ class SynchronizationMode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): INCREMENTAL = "Incremental" FULL_SYNC = "FullSync" +class SynchronizationSettingKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Kind of synchronization setting. + """ + + SCHEDULE_BASED = "ScheduleBased" + +class TriggerKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Kind of synchronization on trigger. + """ + + SCHEDULE_BASED = "ScheduleBased" + class TriggerStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Gets the trigger state """ diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models.py index a048c3c25bd8..8b4218cc1123 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models.py @@ -10,35 +10,79 @@ import msrest.serialization -class DefaultDto(msrest.serialization.Model): +class ProxyDto(msrest.serialization.Model): + """Base data transfer object implementation for proxy resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource id of the azure resource. + :vartype id: str + :ivar name: Name of the azure resource. + :vartype name: str + :ivar system_data: System Data of the Azure resource. 
+ :vartype system_data: ~azure.mgmt.datashare.models.SystemData + :ivar type: Type of the azure resource. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'system_data': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyDto, self).__init__(**kwargs) + self.id = None + self.name = None + self.system_data = None + self.type = None + + +class DefaultDto(ProxyDto): """Base data transfer object implementation for default resources. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: The resource id of the azure resource. :vartype id: str - :param location: Location of the azure resource. - :type location: str :ivar name: Name of the azure resource. :vartype name: str - :param tags: A set of tags. Tags on the azure resource. - :type tags: dict[str, str] + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str + :param location: Location of the azure resource. + :type location: str + :param tags: A set of tags. Tags on the azure resource. 
+ :type tags: dict[str, str] """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( @@ -46,11 +90,8 @@ def __init__( **kwargs ): super(DefaultDto, self).__init__(**kwargs) - self.id = None self.location = kwargs.get('location', None) - self.name = None self.tags = kwargs.get('tags', None) - self.type = None class Account(DefaultDto): @@ -62,14 +103,16 @@ class Account(DefaultDto): :ivar id: The resource id of the azure resource. :vartype id: str - :param location: Location of the azure resource. - :type location: str :ivar name: Name of the azure resource. :vartype name: str - :param tags: A set of tags. Tags on the azure resource. - :type tags: dict[str, str] + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str + :param location: Location of the azure resource. + :type location: str + :param tags: A set of tags. Tags on the azure resource. + :type tags: dict[str, str] :param identity: Required. Identity Info on the Account. :type identity: ~azure.mgmt.datashare.models.Identity :ivar created_at: Time at which the account was created. 
@@ -86,6 +129,7 @@ class Account(DefaultDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'identity': {'required': True}, 'created_at': {'readonly': True}, @@ -96,10 +140,11 @@ class Account(DefaultDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, 'identity': {'key': 'identity', 'type': 'Identity'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, @@ -167,46 +212,11 @@ def __init__( self.tags = kwargs.get('tags', None) -class ProxyDto(msrest.serialization.Model): - """Base data transfer object implementation for proxy resources. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource id of the azure resource. - :vartype id: str - :ivar name: Name of the azure resource. - :vartype name: str - :ivar type: Type of the azure resource. - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyDto, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - - class DataSet(ProxyDto): """A DataSet data transfer object. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ADLSGen1FileDataSet, ADLSGen1FolderDataSet, ADLSGen2FileDataSet, ADLSGen2FileSystemDataSet, ADLSGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDBTableDataSet, SqlDWTableDataSet. + sub-classes are: ADLSGen1FileDataSet, ADLSGen1FolderDataSet, ADLSGen2FileDataSet, ADLSGen2FileSystemDataSet, ADLSGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDBTableDataSet, SqlDWTableDataSet, SynapseWorkspaceSqlPoolTableDataSet. Variables are only populated by the server, and will be ignored when sending a request. @@ -216,18 +226,21 @@ class DataSet(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". 
+ :type kind: str or ~azure.mgmt.datashare.models.DataSetKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -235,12 +248,13 @@ class DataSet(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } _subtype_map = { - 'kind': {'AdlsGen1File': 'ADLSGen1FileDataSet', 'AdlsGen1Folder': 'ADLSGen1FolderDataSet', 'AdlsGen2File': 'ADLSGen2FileDataSet', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSet', 'AdlsGen2Folder': 'ADLSGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDBTableDataSet', 'SqlDWTable': 'SqlDWTableDataSet'} + 'kind': {'AdlsGen1File': 'ADLSGen1FileDataSet', 'AdlsGen1Folder': 'ADLSGen1FolderDataSet', 'AdlsGen2File': 'ADLSGen2FileDataSet', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSet', 'AdlsGen2Folder': 'ADLSGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDBTableDataSet', 'SqlDWTable': 'SqlDWTableDataSet', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSet'} } def __init__( @@ -262,13 +276,15 @@ class ADLSGen1FileDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param account_name: Required. The ADLS account name. :type account_name: str :ivar data_set_id: Unique id for identifying a data set resource. @@ -286,6 +302,7 @@ class ADLSGen1FileDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'account_name': {'required': True}, @@ -299,6 +316,7 @@ class ADLSGen1FileDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'account_name': {'key': 'properties.accountName', 'type': 'str'}, @@ -334,13 +352,15 @@ class ADLSGen1FolderDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param account_name: Required. The ADLS account name. 
:type account_name: str :ivar data_set_id: Unique id for identifying a data set resource. @@ -356,6 +376,7 @@ class ADLSGen1FolderDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'account_name': {'required': True}, @@ -368,6 +389,7 @@ class ADLSGen1FolderDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'account_name': {'key': 'properties.accountName', 'type': 'str'}, @@ -401,13 +423,15 @@ class ADLSGen2FileDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param file_path: Required. File path within the file system. 
@@ -425,6 +449,7 @@ class ADLSGen2FileDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -438,6 +463,7 @@ class ADLSGen2FileDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -466,7 +492,7 @@ class DataSetMapping(ProxyDto): """A data set mapping data transfer object. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ADLSGen2FileDataSetMapping, ADLSGen2FileSystemDataSetMapping, ADLSGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDBTableDataSetMapping, SqlDWTableDataSetMapping. + sub-classes are: ADLSGen2FileDataSetMapping, ADLSGen2FileSystemDataSetMapping, ADLSGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDBTableDataSetMapping, SqlDWTableDataSetMapping, SynapseWorkspaceSqlPoolTableDataSetMapping. Variables are only populated by the server, and will be ignored when sending a request. @@ -476,18 +502,21 @@ class DataSetMapping(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -495,12 +524,13 @@ class DataSetMapping(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } _subtype_map = { - 'kind': {'AdlsGen2File': 'ADLSGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'ADLSGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDBTableDataSetMapping', 'SqlDWTable': 'SqlDWTableDataSetMapping'} + 'kind': {'AdlsGen2File': 'ADLSGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'ADLSGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDBTableDataSetMapping', 'SqlDWTable': 'SqlDWTableDataSetMapping', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSetMapping'} } def __init__( @@ -522,13 +552,15 @@ class 
ADLSGen2FileDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values @@ -554,6 +586,7 @@ class ADLSGen2FileDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -569,6 +602,7 @@ class ADLSGen2FileDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -610,13 +644,15 @@ class ADLSGen2FileSystemDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. 
Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param file_system: Required. The file system name. @@ -632,6 +668,7 @@ class ADLSGen2FileSystemDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -644,6 +681,7 @@ class ADLSGen2FileSystemDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -677,13 +715,15 @@ class ADLSGen2FileSystemDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". 
+ :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values @@ -705,6 +745,7 @@ class ADLSGen2FileSystemDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -719,6 +760,7 @@ class ADLSGen2FileSystemDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -756,13 +798,15 @@ class ADLSGen2FolderDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param file_system: Required. File system to which the folder belongs. 
@@ -780,6 +824,7 @@ class ADLSGen2FolderDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -793,6 +838,7 @@ class ADLSGen2FolderDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -828,13 +874,15 @@ class ADLSGen2FolderDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -858,6 +906,7 @@ class ADLSGen2FolderDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -873,6 +922,7 @@ class ADLSGen2FolderDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -912,13 +962,15 @@ class BlobContainerDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param container_name: Required. BLOB Container name. :type container_name: str :ivar data_set_id: Unique id for identifying a data set resource. 
@@ -934,6 +986,7 @@ class BlobContainerDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -946,6 +999,7 @@ class BlobContainerDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -979,13 +1033,15 @@ class BlobContainerDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param container_name: Required. BLOB Container name. :type container_name: str :param data_set_id: Required. The id of the source data set. 
@@ -1007,6 +1063,7 @@ class BlobContainerDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1021,6 +1078,7 @@ class BlobContainerDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1058,13 +1116,15 @@ class BlobDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param container_name: Required. Container that has the file path. :type container_name: str :ivar data_set_id: Unique id for identifying a data set resource. 
@@ -1082,6 +1142,7 @@ class BlobDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1095,6 +1156,7 @@ class BlobDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1130,13 +1192,15 @@ class BlobDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param container_name: Required. Container that has the file path. :type container_name: str :param data_set_id: Required. The id of the source data set. 
@@ -1162,6 +1226,7 @@ class BlobDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1177,6 +1242,7 @@ class BlobDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1218,13 +1284,15 @@ class BlobFolderDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param container_name: Required. Container that has the file path. :type container_name: str :ivar data_set_id: Unique id for identifying a data set resource. 
@@ -1242,6 +1310,7 @@ class BlobFolderDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1255,6 +1324,7 @@ class BlobFolderDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1290,13 +1360,15 @@ class BlobFolderDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param container_name: Required. Container that has the file path. :type container_name: str :param data_set_id: Required. The id of the source data set. 
@@ -1320,6 +1392,7 @@ class BlobFolderDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1335,6 +1408,7 @@ class BlobFolderDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1374,12 +1448,17 @@ class ConsumerInvitation(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar data_set_count: Number of data sets in a share. :vartype data_set_count: int :ivar description: Description shared when the invitation was created. :vartype description: str + :ivar expiration_date: The expiration date for the share subscription created by accepting the + invitation. + :vartype expiration_date: ~datetime.datetime :param invitation_id: Required. Unique id of the invitation. :type invitation_id: str :ivar invitation_status: The status of the invitation. 
Possible values include: "Pending", @@ -1410,9 +1489,11 @@ class ConsumerInvitation(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'data_set_count': {'readonly': True}, 'description': {'readonly': True}, + 'expiration_date': {'readonly': True}, 'invitation_id': {'required': True}, 'invitation_status': {'readonly': True}, 'location': {'readonly': True}, @@ -1430,9 +1511,11 @@ class ConsumerInvitation(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'data_set_count': {'key': 'properties.dataSetCount', 'type': 'int'}, 'description': {'key': 'properties.description', 'type': 'str'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'invitation_id': {'key': 'properties.invitationId', 'type': 'str'}, 'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'}, 'location': {'key': 'properties.location', 'type': 'str'}, @@ -1454,6 +1537,7 @@ def __init__( super(ConsumerInvitation, self).__init__(**kwargs) self.data_set_count = None self.description = None + self.expiration_date = None self.invitation_id = kwargs['invitation_id'] self.invitation_status = None self.location = None @@ -1506,6 +1590,8 @@ class ConsumerSourceDataSet(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar data_set_id: DataSet Id. @@ -1518,13 +1604,15 @@ class ConsumerSourceDataSet(ProxyDto): :vartype data_set_path: str :ivar data_set_type: Type of data set. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", - "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable". + "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". :vartype data_set_type: str or ~azure.mgmt.datashare.models.DataSetType """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'data_set_id': {'readonly': True}, 'data_set_location': {'readonly': True}, @@ -1536,6 +1624,7 @@ class ConsumerSourceDataSet(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, 'data_set_location': {'key': 'properties.dataSetLocation', 'type': 'str'}, @@ -1772,8 +1861,12 @@ class Invitation(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str + :param expiration_date: The expiration date for the invitation and share subscription. + :type expiration_date: ~datetime.datetime :ivar invitation_id: unique invitation id. :vartype invitation_id: str :ivar invitation_status: The status of the invitation. 
Possible values include: "Pending", @@ -1800,6 +1893,7 @@ class Invitation(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'invitation_id': {'readonly': True}, 'invitation_status': {'readonly': True}, @@ -1812,7 +1906,9 @@ class Invitation(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'invitation_id': {'key': 'properties.invitationId', 'type': 'str'}, 'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'}, 'responded_at': {'key': 'properties.respondedAt', 'type': 'iso-8601'}, @@ -1829,6 +1925,7 @@ def __init__( **kwargs ): super(Invitation, self).__init__(**kwargs) + self.expiration_date = kwargs.get('expiration_date', None) self.invitation_id = None self.invitation_status = None self.responded_at = None @@ -1880,13 +1977,15 @@ class KustoClusterDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param kusto_cluster_resource_id: Required. 
Resource id of the kusto cluster. @@ -1901,6 +2000,7 @@ class KustoClusterDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -1912,6 +2012,7 @@ class KustoClusterDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -1943,13 +2044,15 @@ class KustoClusterDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -1967,6 +2070,7 @@ class KustoClusterDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -1979,6 +2083,7 @@ class KustoClusterDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -2012,13 +2117,15 @@ class KustoDatabaseDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param kusto_database_resource_id: Required. Resource id of the kusto database. 
@@ -2033,6 +2140,7 @@ class KustoDatabaseDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -2044,6 +2152,7 @@ class KustoDatabaseDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -2075,13 +2184,15 @@ class KustoDatabaseDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -2099,6 +2210,7 @@ class KustoDatabaseDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -2111,6 +2223,7 @@ class KustoDatabaseDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -2202,6 +2315,8 @@ class OperationMetaMetricSpecification(msrest.serialization.Model): :type display_name: str :param enable_regional_mdm_account: enable regional mdm account. :type enable_regional_mdm_account: str + :param fill_gap_with_zero: fill gap with zero. + :type fill_gap_with_zero: bool :param internal_metric_name: internal metric name. :type internal_metric_name: str :param name: name of the metric. 
@@ -2223,6 +2338,7 @@ class OperationMetaMetricSpecification(msrest.serialization.Model): 'display_description': {'key': 'displayDescription', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'}, 'internal_metric_name': {'key': 'internalMetricName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'}, @@ -2241,6 +2357,7 @@ def __init__( self.display_description = kwargs.get('display_description', None) self.display_name = kwargs.get('display_name', None) self.enable_regional_mdm_account = kwargs.get('enable_regional_mdm_account', None) + self.fill_gap_with_zero = kwargs.get('fill_gap_with_zero', None) self.internal_metric_name = kwargs.get('internal_metric_name', None) self.name = kwargs.get('name', None) self.resource_id_dimension_name_override = kwargs.get('resource_id_dimension_name_override', None) @@ -2382,6 +2499,8 @@ class ProviderShareSubscription(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar consumer_email: Email of the consumer who created the share subscription. @@ -2392,6 +2511,8 @@ class ProviderShareSubscription(ProxyDto): :vartype consumer_tenant_name: str :ivar created_at: created at. :vartype created_at: ~datetime.datetime + :param expiration_date: Expiration date of the share subscription in UTC format. + :type expiration_date: ~datetime.datetime :ivar provider_email: Email of the provider who created the share. :vartype provider_email: str :ivar provider_name: Name of the provider who created the share. 
@@ -2408,6 +2529,7 @@ class ProviderShareSubscription(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'consumer_email': {'readonly': True}, 'consumer_name': {'readonly': True}, @@ -2423,11 +2545,13 @@ class ProviderShareSubscription(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'consumer_email': {'key': 'properties.consumerEmail', 'type': 'str'}, 'consumer_name': {'key': 'properties.consumerName', 'type': 'str'}, 'consumer_tenant_name': {'key': 'properties.consumerTenantName', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'provider_email': {'key': 'properties.providerEmail', 'type': 'str'}, 'provider_name': {'key': 'properties.providerName', 'type': 'str'}, 'shared_at': {'key': 'properties.sharedAt', 'type': 'iso-8601'}, @@ -2444,6 +2568,7 @@ def __init__( self.consumer_name = None self.consumer_tenant_name = None self.created_at = None + self.expiration_date = kwargs.get('expiration_date', None) self.provider_email = None self.provider_name = None self.shared_at = None @@ -2488,11 +2613,9 @@ class SourceShareSynchronizationSetting(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting on share.Constant filled by server. 
+ Possible values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.SourceShareSynchronizationSettingKind """ _validation = { @@ -2520,11 +2643,9 @@ class ScheduledSourceSynchronizationSetting(SourceShareSynchronizationSetting): All required parameters must be populated in order to send to Azure. - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting on share.Constant filled by server. + Possible values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.SourceShareSynchronizationSettingKind :param recurrence_interval: Recurrence Interval. Possible values include: "Hour", "Day". :type recurrence_interval: str or ~azure.mgmt.datashare.models.RecurrenceInterval :param synchronization_time: Synchronization time. @@ -2565,18 +2686,19 @@ class SynchronizationSetting(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting.Constant filled by server. Possible + values include: "ScheduleBased". 
+ :type kind: str or ~azure.mgmt.datashare.models.SynchronizationSettingKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -2584,6 +2706,7 @@ class SynchronizationSetting(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } @@ -2611,13 +2734,13 @@ class ScheduledSynchronizationSetting(SynchronizationSetting): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting.Constant filled by server. Possible + values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.SynchronizationSettingKind :ivar created_at: Time at which the synchronization setting was created. :vartype created_at: ~datetime.datetime :ivar provisioning_state: Gets or sets the provisioning state. 
Possible values include: @@ -2635,6 +2758,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'created_at': {'readonly': True}, @@ -2647,6 +2771,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, @@ -2683,18 +2808,19 @@ class Trigger(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization on trigger.Constant filled by server. Possible + values include: "ScheduleBased". 
+ :type kind: str or ~azure.mgmt.datashare.models.TriggerKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -2702,6 +2828,7 @@ class Trigger(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } @@ -2729,13 +2856,13 @@ class ScheduledTrigger(Trigger): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization on trigger.Constant filled by server. Possible + values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.TriggerKind :ivar created_at: Time at which the trigger was created. :vartype created_at: ~datetime.datetime :ivar provisioning_state: Gets the provisioning state. 
Possible values include: "Succeeded", @@ -2759,6 +2886,7 @@ class ScheduledTrigger(Trigger): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'created_at': {'readonly': True}, @@ -2772,6 +2900,7 @@ class ScheduledTrigger(Trigger): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, @@ -2807,6 +2936,8 @@ class Share(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar created_at: Time at which the share was created. @@ -2829,6 +2960,7 @@ class Share(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'created_at': {'readonly': True}, 'provisioning_state': {'readonly': True}, @@ -2839,6 +2971,7 @@ class Share(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'description': {'key': 'properties.description', 'type': 'str'}, @@ -2903,10 +3036,14 @@ class ShareSubscription(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. 
:vartype type: str :ivar created_at: Time at which the share subscription was created. :vartype created_at: ~datetime.datetime + :param expiration_date: The expiration date of the share subscription. + :type expiration_date: ~datetime.datetime :param invitation_id: Required. The invitation id. :type invitation_id: str :ivar provider_email: Email of the provider who created the resource. @@ -2940,6 +3077,7 @@ class ShareSubscription(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'created_at': {'readonly': True}, 'invitation_id': {'required': True}, @@ -2960,8 +3098,10 @@ class ShareSubscription(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'invitation_id': {'key': 'properties.invitationId', 'type': 'str'}, 'provider_email': {'key': 'properties.providerEmail', 'type': 'str'}, 'provider_name': {'key': 'properties.providerName', 'type': 'str'}, @@ -2983,6 +3123,7 @@ def __init__( ): super(ShareSubscription, self).__init__(**kwargs) self.created_at = None + self.expiration_date = kwargs.get('expiration_date', None) self.invitation_id = kwargs['invitation_id'] self.provider_email = None self.provider_name = None @@ -3245,13 +3386,15 @@ class SqlDBTableDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param database_name: Database name of the source data set. :type database_name: str :ivar data_set_id: Unique id for identifying a data set resource. @@ -3267,6 +3410,7 @@ class SqlDBTableDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -3275,6 +3419,7 @@ class SqlDBTableDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'database_name': {'key': 'properties.databaseName', 'type': 'str'}, @@ -3308,13 +3453,15 @@ class SqlDBTableDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". 
+ :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param database_name: Required. DatabaseName name of the sink data set. :type database_name: str :param data_set_id: Required. The id of the source data set. @@ -3336,6 +3483,7 @@ class SqlDBTableDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'database_name': {'required': True}, @@ -3350,6 +3498,7 @@ class SqlDBTableDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'database_name': {'key': 'properties.databaseName', 'type': 'str'}, @@ -3387,13 +3536,15 @@ class SqlDWTableDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param data_warehouse_name: DataWarehouse name of the source data set. 
@@ -3409,6 +3560,7 @@ class SqlDWTableDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -3417,6 +3569,7 @@ class SqlDWTableDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -3450,13 +3603,15 @@ class SqlDWTableDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -3478,6 +3633,7 @@ class SqlDWTableDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -3492,6 +3648,7 @@ class SqlDWTableDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -3518,6 +3675,132 @@ def __init__( self.table_name = kwargs['table_name'] +class SynapseWorkspaceSqlPoolTableDataSet(DataSet): + """A Synapse Workspace Sql Pool Table data set. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource id of the azure resource. + :vartype id: str + :ivar name: Name of the azure resource. + :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData + :ivar type: Type of the azure resource. + :vartype type: str + :param kind: Required. Kind of data set.Constant filled by server. Possible values include: + "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", + "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind + :ivar data_set_id: Unique id for identifying a data set resource. + :vartype data_set_id: str + :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse + Workspace SQL Pool Table. 
+ :type synapse_workspace_sql_pool_table_resource_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'system_data': {'readonly': True}, + 'type': {'readonly': True}, + 'kind': {'required': True}, + 'data_set_id': {'readonly': True}, + 'synapse_workspace_sql_pool_table_resource_id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, + 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SynapseWorkspaceSqlPoolTableDataSet, self).__init__(**kwargs) + self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str + self.data_set_id = None + self.synapse_workspace_sql_pool_table_resource_id = kwargs['synapse_workspace_sql_pool_table_resource_id'] + + +class SynapseWorkspaceSqlPoolTableDataSetMapping(DataSetMapping): + """A Synapse Workspace Sql Pool Table data set mapping. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource id of the azure resource. + :vartype id: str + :ivar name: Name of the azure resource. + :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData + :ivar type: Type of the azure resource. + :vartype type: str + :param kind: Required. Kind of data set mapping.Constant filled by server. 
Possible values + include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind + :param data_set_id: Required. The id of the source data set. + :type data_set_id: str + :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values + include: "Ok", "Broken". + :vartype data_set_mapping_status: str or ~azure.mgmt.datashare.models.DataSetMappingStatus + :ivar provisioning_state: Provisioning state of the data set mapping. Possible values include: + "Succeeded", "Creating", "Deleting", "Moving", "Failed". + :vartype provisioning_state: str or ~azure.mgmt.datashare.models.ProvisioningState + :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse + Workspace SQL Pool Table. + :type synapse_workspace_sql_pool_table_resource_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'system_data': {'readonly': True}, + 'type': {'readonly': True}, + 'kind': {'required': True}, + 'data_set_id': {'required': True}, + 'data_set_mapping_status': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'synapse_workspace_sql_pool_table_resource_id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, + 'data_set_mapping_status': {'key': 'properties.dataSetMappingStatus', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'}, + } 
+ + def __init__( + self, + **kwargs + ): + super(SynapseWorkspaceSqlPoolTableDataSetMapping, self).__init__(**kwargs) + self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str + self.data_set_id = kwargs['data_set_id'] + self.data_set_mapping_status = None + self.provisioning_state = None + self.synapse_workspace_sql_pool_table_resource_id = kwargs['synapse_workspace_sql_pool_table_resource_id'] + + class SynchronizationDetails(msrest.serialization.Model): """Synchronization details at data set level. @@ -3527,7 +3810,8 @@ class SynchronizationDetails(msrest.serialization.Model): :vartype data_set_id: str :ivar data_set_type: Type of the data set. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", - "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable". + "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". :vartype data_set_type: str or ~azure.mgmt.datashare.models.DataSetType :ivar duration_ms: Duration of data set level copy. :vartype duration_ms: int @@ -3693,6 +3977,47 @@ def __init__( self.synchronization_mode = kwargs.get('synchronization_mode', None) +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure.mgmt.datashare.models.CreatedByType + :param last_modified_at: The type of identity that last modified the resource. + :type last_modified_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. 
+ :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~azure.mgmt.datashare.models.LastModifiedByType + """ + + _attribute_map = { + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_at = kwargs.get('created_at', None) + self.created_by = kwargs.get('created_by', None) + self.created_by_type = kwargs.get('created_by_type', None) + self.last_modified_at = kwargs.get('last_modified_at', None) + self.last_modified_by = kwargs.get('last_modified_by', None) + self.last_modified_by_type = kwargs.get('last_modified_by_type', None) + + class TriggerList(msrest.serialization.Model): """List response for get triggers. diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models_py3.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models_py3.py index cc151556ce31..f2f5217a4718 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models_py3.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/models/_models_py3.py @@ -15,35 +15,79 @@ from ._data_share_management_client_enums import * -class DefaultDto(msrest.serialization.Model): +class ProxyDto(msrest.serialization.Model): + """Base data transfer object implementation for proxy resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource id of the azure resource. 
+ :vartype id: str + :ivar name: Name of the azure resource. + :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData + :ivar type: Type of the azure resource. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'system_data': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyDto, self).__init__(**kwargs) + self.id = None + self.name = None + self.system_data = None + self.type = None + + +class DefaultDto(ProxyDto): """Base data transfer object implementation for default resources. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: The resource id of the azure resource. :vartype id: str - :param location: Location of the azure resource. - :type location: str :ivar name: Name of the azure resource. :vartype name: str - :param tags: A set of tags. Tags on the azure resource. - :type tags: dict[str, str] + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str + :param location: Location of the azure resource. + :type location: str + :param tags: A set of tags. Tags on the azure resource. 
+ :type tags: dict[str, str] """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( @@ -54,11 +98,8 @@ def __init__( **kwargs ): super(DefaultDto, self).__init__(**kwargs) - self.id = None self.location = location - self.name = None self.tags = tags - self.type = None class Account(DefaultDto): @@ -70,14 +111,16 @@ class Account(DefaultDto): :ivar id: The resource id of the azure resource. :vartype id: str - :param location: Location of the azure resource. - :type location: str :ivar name: Name of the azure resource. :vartype name: str - :param tags: A set of tags. Tags on the azure resource. - :type tags: dict[str, str] + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str + :param location: Location of the azure resource. + :type location: str + :param tags: A set of tags. Tags on the azure resource. + :type tags: dict[str, str] :param identity: Required. Identity Info on the Account. :type identity: ~azure.mgmt.datashare.models.Identity :ivar created_at: Time at which the account was created. 
@@ -94,6 +137,7 @@ class Account(DefaultDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'identity': {'required': True}, 'created_at': {'readonly': True}, @@ -104,10 +148,11 @@ class Account(DefaultDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, 'identity': {'key': 'identity', 'type': 'Identity'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, @@ -184,46 +229,11 @@ def __init__( self.tags = tags -class ProxyDto(msrest.serialization.Model): - """Base data transfer object implementation for proxy resources. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource id of the azure resource. - :vartype id: str - :ivar name: Name of the azure resource. - :vartype name: str - :ivar type: Type of the azure resource. - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ProxyDto, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - - class DataSet(ProxyDto): """A DataSet data transfer object. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ADLSGen1FileDataSet, ADLSGen1FolderDataSet, ADLSGen2FileDataSet, ADLSGen2FileSystemDataSet, ADLSGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDBTableDataSet, SqlDWTableDataSet. + sub-classes are: ADLSGen1FileDataSet, ADLSGen1FolderDataSet, ADLSGen2FileDataSet, ADLSGen2FileSystemDataSet, ADLSGen2FolderDataSet, BlobDataSet, BlobFolderDataSet, BlobContainerDataSet, KustoClusterDataSet, KustoDatabaseDataSet, SqlDBTableDataSet, SqlDWTableDataSet, SynapseWorkspaceSqlPoolTableDataSet. Variables are only populated by the server, and will be ignored when sending a request. @@ -233,18 +243,21 @@ class DataSet(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". 
+ :type kind: str or ~azure.mgmt.datashare.models.DataSetKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -252,12 +265,13 @@ class DataSet(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } _subtype_map = { - 'kind': {'AdlsGen1File': 'ADLSGen1FileDataSet', 'AdlsGen1Folder': 'ADLSGen1FolderDataSet', 'AdlsGen2File': 'ADLSGen2FileDataSet', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSet', 'AdlsGen2Folder': 'ADLSGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDBTableDataSet', 'SqlDWTable': 'SqlDWTableDataSet'} + 'kind': {'AdlsGen1File': 'ADLSGen1FileDataSet', 'AdlsGen1Folder': 'ADLSGen1FolderDataSet', 'AdlsGen2File': 'ADLSGen2FileDataSet', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSet', 'AdlsGen2Folder': 'ADLSGen2FolderDataSet', 'Blob': 'BlobDataSet', 'BlobFolder': 'BlobFolderDataSet', 'Container': 'BlobContainerDataSet', 'KustoCluster': 'KustoClusterDataSet', 'KustoDatabase': 'KustoDatabaseDataSet', 'SqlDBTable': 'SqlDBTableDataSet', 'SqlDWTable': 'SqlDWTableDataSet', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSet'} } def __init__( @@ -279,13 +293,15 @@ class ADLSGen1FileDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param account_name: Required. The ADLS account name. :type account_name: str :ivar data_set_id: Unique id for identifying a data set resource. @@ -303,6 +319,7 @@ class ADLSGen1FileDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'account_name': {'required': True}, @@ -316,6 +333,7 @@ class ADLSGen1FileDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'account_name': {'key': 'properties.accountName', 'type': 'str'}, @@ -357,13 +375,15 @@ class ADLSGen1FolderDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param account_name: Required. The ADLS account name. 
:type account_name: str :ivar data_set_id: Unique id for identifying a data set resource. @@ -379,6 +399,7 @@ class ADLSGen1FolderDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'account_name': {'required': True}, @@ -391,6 +412,7 @@ class ADLSGen1FolderDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'account_name': {'key': 'properties.accountName', 'type': 'str'}, @@ -429,13 +451,15 @@ class ADLSGen2FileDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param file_path: Required. File path within the file system. 
@@ -453,6 +477,7 @@ class ADLSGen2FileDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -466,6 +491,7 @@ class ADLSGen2FileDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -500,7 +526,7 @@ class DataSetMapping(ProxyDto): """A data set mapping data transfer object. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ADLSGen2FileDataSetMapping, ADLSGen2FileSystemDataSetMapping, ADLSGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDBTableDataSetMapping, SqlDWTableDataSetMapping. + sub-classes are: ADLSGen2FileDataSetMapping, ADLSGen2FileSystemDataSetMapping, ADLSGen2FolderDataSetMapping, BlobDataSetMapping, BlobFolderDataSetMapping, BlobContainerDataSetMapping, KustoClusterDataSetMapping, KustoDatabaseDataSetMapping, SqlDBTableDataSetMapping, SqlDWTableDataSetMapping, SynapseWorkspaceSqlPoolTableDataSetMapping. Variables are only populated by the server, and will be ignored when sending a request. @@ -510,18 +536,21 @@ class DataSetMapping(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -529,12 +558,13 @@ class DataSetMapping(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } _subtype_map = { - 'kind': {'AdlsGen2File': 'ADLSGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'ADLSGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDBTableDataSetMapping', 'SqlDWTable': 'SqlDWTableDataSetMapping'} + 'kind': {'AdlsGen2File': 'ADLSGen2FileDataSetMapping', 'AdlsGen2FileSystem': 'ADLSGen2FileSystemDataSetMapping', 'AdlsGen2Folder': 'ADLSGen2FolderDataSetMapping', 'Blob': 'BlobDataSetMapping', 'BlobFolder': 'BlobFolderDataSetMapping', 'Container': 'BlobContainerDataSetMapping', 'KustoCluster': 'KustoClusterDataSetMapping', 'KustoDatabase': 'KustoDatabaseDataSetMapping', 'SqlDBTable': 'SqlDBTableDataSetMapping', 'SqlDWTable': 'SqlDWTableDataSetMapping', 'SynapseWorkspaceSqlPoolTable': 'SynapseWorkspaceSqlPoolTableDataSetMapping'} } def __init__( @@ -556,13 +586,15 @@ class 
ADLSGen2FileDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values @@ -588,6 +620,7 @@ class ADLSGen2FileDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -603,6 +636,7 @@ class ADLSGen2FileDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -652,13 +686,15 @@ class ADLSGen2FileSystemDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. 
Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param file_system: Required. The file system name. @@ -674,6 +710,7 @@ class ADLSGen2FileSystemDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -686,6 +723,7 @@ class ADLSGen2FileSystemDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -724,13 +762,15 @@ class ADLSGen2FileSystemDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". 
+ :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values @@ -752,6 +792,7 @@ class ADLSGen2FileSystemDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -766,6 +807,7 @@ class ADLSGen2FileSystemDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -809,13 +851,15 @@ class ADLSGen2FolderDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param file_system: Required. File system to which the folder belongs. 
@@ -833,6 +877,7 @@ class ADLSGen2FolderDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -846,6 +891,7 @@ class ADLSGen2FolderDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -887,13 +933,15 @@ class ADLSGen2FolderDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -917,6 +965,7 @@ class ADLSGen2FolderDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -932,6 +981,7 @@ class ADLSGen2FolderDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -978,13 +1028,15 @@ class BlobContainerDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param container_name: Required. BLOB Container name. :type container_name: str :ivar data_set_id: Unique id for identifying a data set resource. 
@@ -1000,6 +1052,7 @@ class BlobContainerDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1012,6 +1065,7 @@ class BlobContainerDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1050,13 +1104,15 @@ class BlobContainerDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param container_name: Required. BLOB Container name. :type container_name: str :param data_set_id: Required. The id of the source data set. 
@@ -1078,6 +1134,7 @@ class BlobContainerDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1092,6 +1149,7 @@ class BlobContainerDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1135,13 +1193,15 @@ class BlobDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param container_name: Required. Container that has the file path. :type container_name: str :ivar data_set_id: Unique id for identifying a data set resource. 
@@ -1159,6 +1219,7 @@ class BlobDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1172,6 +1233,7 @@ class BlobDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1213,13 +1275,15 @@ class BlobDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param container_name: Required. Container that has the file path. :type container_name: str :param data_set_id: Required. The id of the source data set. 
@@ -1245,6 +1309,7 @@ class BlobDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1260,6 +1325,7 @@ class BlobDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1309,13 +1375,15 @@ class BlobFolderDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param container_name: Required. Container that has the file path. :type container_name: str :ivar data_set_id: Unique id for identifying a data set resource. 
@@ -1333,6 +1401,7 @@ class BlobFolderDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1346,6 +1415,7 @@ class BlobFolderDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1387,13 +1457,15 @@ class BlobFolderDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param container_name: Required. Container that has the file path. :type container_name: str :param data_set_id: Required. The id of the source data set. 
@@ -1417,6 +1489,7 @@ class BlobFolderDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'container_name': {'required': True}, @@ -1432,6 +1505,7 @@ class BlobFolderDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'container_name': {'key': 'properties.containerName', 'type': 'str'}, @@ -1478,12 +1552,17 @@ class ConsumerInvitation(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar data_set_count: Number of data sets in a share. :vartype data_set_count: int :ivar description: Description shared when the invitation was created. :vartype description: str + :ivar expiration_date: The expiration date for the share subscription created by accepting the + invitation. + :vartype expiration_date: ~datetime.datetime :param invitation_id: Required. Unique id of the invitation. :type invitation_id: str :ivar invitation_status: The status of the invitation. 
Possible values include: "Pending", @@ -1514,9 +1593,11 @@ class ConsumerInvitation(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'data_set_count': {'readonly': True}, 'description': {'readonly': True}, + 'expiration_date': {'readonly': True}, 'invitation_id': {'required': True}, 'invitation_status': {'readonly': True}, 'location': {'readonly': True}, @@ -1534,9 +1615,11 @@ class ConsumerInvitation(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'data_set_count': {'key': 'properties.dataSetCount', 'type': 'int'}, 'description': {'key': 'properties.description', 'type': 'str'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'invitation_id': {'key': 'properties.invitationId', 'type': 'str'}, 'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'}, 'location': {'key': 'properties.location', 'type': 'str'}, @@ -1560,6 +1643,7 @@ def __init__( super(ConsumerInvitation, self).__init__(**kwargs) self.data_set_count = None self.description = None + self.expiration_date = None self.invitation_id = invitation_id self.invitation_status = None self.location = None @@ -1615,6 +1699,8 @@ class ConsumerSourceDataSet(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar data_set_id: DataSet Id. @@ -1627,13 +1713,15 @@ class ConsumerSourceDataSet(ProxyDto): :vartype data_set_path: str :ivar data_set_type: Type of data set. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", - "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable". + "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". :vartype data_set_type: str or ~azure.mgmt.datashare.models.DataSetType """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'data_set_id': {'readonly': True}, 'data_set_location': {'readonly': True}, @@ -1645,6 +1733,7 @@ class ConsumerSourceDataSet(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, 'data_set_location': {'key': 'properties.dataSetLocation', 'type': 'str'}, @@ -1902,8 +1991,12 @@ class Invitation(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str + :param expiration_date: The expiration date for the invitation and share subscription. + :type expiration_date: ~datetime.datetime :ivar invitation_id: unique invitation id. :vartype invitation_id: str :ivar invitation_status: The status of the invitation. 
Possible values include: "Pending", @@ -1930,6 +2023,7 @@ class Invitation(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'invitation_id': {'readonly': True}, 'invitation_status': {'readonly': True}, @@ -1942,7 +2036,9 @@ class Invitation(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'invitation_id': {'key': 'properties.invitationId', 'type': 'str'}, 'invitation_status': {'key': 'properties.invitationStatus', 'type': 'str'}, 'responded_at': {'key': 'properties.respondedAt', 'type': 'iso-8601'}, @@ -1957,12 +2053,14 @@ class Invitation(ProxyDto): def __init__( self, *, + expiration_date: Optional[datetime.datetime] = None, target_active_directory_id: Optional[str] = None, target_email: Optional[str] = None, target_object_id: Optional[str] = None, **kwargs ): super(Invitation, self).__init__(**kwargs) + self.expiration_date = expiration_date self.invitation_id = None self.invitation_status = None self.responded_at = None @@ -2017,13 +2115,15 @@ class KustoClusterDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". 
+ :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param kusto_cluster_resource_id: Required. Resource id of the kusto cluster. @@ -2038,6 +2138,7 @@ class KustoClusterDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -2049,6 +2150,7 @@ class KustoClusterDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -2082,13 +2184,15 @@ class KustoClusterDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -2106,6 +2210,7 @@ class KustoClusterDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -2118,6 +2223,7 @@ class KustoClusterDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -2154,13 +2260,15 @@ class KustoDatabaseDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param kusto_database_resource_id: Required. Resource id of the kusto database. 
@@ -2175,6 +2283,7 @@ class KustoDatabaseDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -2186,6 +2295,7 @@ class KustoDatabaseDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -2219,13 +2329,15 @@ class KustoDatabaseDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -2243,6 +2355,7 @@ class KustoDatabaseDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -2255,6 +2368,7 @@ class KustoDatabaseDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -2356,6 +2470,8 @@ class OperationMetaMetricSpecification(msrest.serialization.Model): :type display_name: str :param enable_regional_mdm_account: enable regional mdm account. :type enable_regional_mdm_account: str + :param fill_gap_with_zero: fill gap with zero. + :type fill_gap_with_zero: bool :param internal_metric_name: internal metric name. :type internal_metric_name: str :param name: name of the metric. 
@@ -2377,6 +2493,7 @@ class OperationMetaMetricSpecification(msrest.serialization.Model): 'display_description': {'key': 'displayDescription', 'type': 'str'}, 'display_name': {'key': 'displayName', 'type': 'str'}, 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'fill_gap_with_zero': {'key': 'fillGapWithZero', 'type': 'bool'}, 'internal_metric_name': {'key': 'internalMetricName', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'resource_id_dimension_name_override': {'key': 'resourceIdDimensionNameOverride', 'type': 'str'}, @@ -2393,6 +2510,7 @@ def __init__( display_description: Optional[str] = None, display_name: Optional[str] = None, enable_regional_mdm_account: Optional[str] = None, + fill_gap_with_zero: Optional[bool] = None, internal_metric_name: Optional[str] = None, name: Optional[str] = None, resource_id_dimension_name_override: Optional[str] = None, @@ -2407,6 +2525,7 @@ def __init__( self.display_description = display_description self.display_name = display_name self.enable_regional_mdm_account = enable_regional_mdm_account + self.fill_gap_with_zero = fill_gap_with_zero self.internal_metric_name = internal_metric_name self.name = name self.resource_id_dimension_name_override = resource_id_dimension_name_override @@ -2566,6 +2685,8 @@ class ProviderShareSubscription(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar consumer_email: Email of the consumer who created the share subscription. @@ -2576,6 +2697,8 @@ class ProviderShareSubscription(ProxyDto): :vartype consumer_tenant_name: str :ivar created_at: created at. :vartype created_at: ~datetime.datetime + :param expiration_date: Expiration date of the share subscription in UTC format. 
+ :type expiration_date: ~datetime.datetime :ivar provider_email: Email of the provider who created the share. :vartype provider_email: str :ivar provider_name: Name of the provider who created the share. @@ -2592,6 +2715,7 @@ class ProviderShareSubscription(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'consumer_email': {'readonly': True}, 'consumer_name': {'readonly': True}, @@ -2607,11 +2731,13 @@ class ProviderShareSubscription(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'consumer_email': {'key': 'properties.consumerEmail', 'type': 'str'}, 'consumer_name': {'key': 'properties.consumerName', 'type': 'str'}, 'consumer_tenant_name': {'key': 'properties.consumerTenantName', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'provider_email': {'key': 'properties.providerEmail', 'type': 'str'}, 'provider_name': {'key': 'properties.providerName', 'type': 'str'}, 'shared_at': {'key': 'properties.sharedAt', 'type': 'iso-8601'}, @@ -2621,6 +2747,8 @@ class ProviderShareSubscription(ProxyDto): def __init__( self, + *, + expiration_date: Optional[datetime.datetime] = None, **kwargs ): super(ProviderShareSubscription, self).__init__(**kwargs) @@ -2628,6 +2756,7 @@ def __init__( self.consumer_name = None self.consumer_tenant_name = None self.created_at = None + self.expiration_date = expiration_date self.provider_email = None self.provider_name = None self.shared_at = None @@ -2675,11 +2804,9 @@ class SourceShareSynchronizationSetting(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param kind: Required. Kind of synchronization.Constant filled by server. 
Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting on share.Constant filled by server. + Possible values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.SourceShareSynchronizationSettingKind """ _validation = { @@ -2707,11 +2834,9 @@ class ScheduledSourceSynchronizationSetting(SourceShareSynchronizationSetting): All required parameters must be populated in order to send to Azure. - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting on share.Constant filled by server. + Possible values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.SourceShareSynchronizationSettingKind :param recurrence_interval: Recurrence Interval. Possible values include: "Hour", "Day". :type recurrence_interval: str or ~azure.mgmt.datashare.models.RecurrenceInterval :param synchronization_time: Synchronization time. @@ -2755,18 +2880,19 @@ class SynchronizationSetting(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. 
Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting.Constant filled by server. Possible + values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.SynchronizationSettingKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -2774,6 +2900,7 @@ class SynchronizationSetting(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } @@ -2801,13 +2928,13 @@ class ScheduledSynchronizationSetting(SynchronizationSetting): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization setting.Constant filled by server. Possible + values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.SynchronizationSettingKind :ivar created_at: Time at which the synchronization setting was created. 
:vartype created_at: ~datetime.datetime :ivar provisioning_state: Gets or sets the provisioning state. Possible values include: @@ -2825,6 +2952,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'created_at': {'readonly': True}, @@ -2837,6 +2965,7 @@ class ScheduledSynchronizationSetting(SynchronizationSetting): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, @@ -2876,18 +3005,19 @@ class Trigger(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization on trigger.Constant filled by server. Possible + values include: "ScheduleBased". 
+ :type kind: str or ~azure.mgmt.datashare.models.TriggerKind """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, } @@ -2895,6 +3025,7 @@ class Trigger(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, } @@ -2922,13 +3053,13 @@ class ScheduledTrigger(Trigger): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str - :param kind: Required. Kind of synchronization.Constant filled by server. Possible values - include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + :param kind: Required. Kind of synchronization on trigger.Constant filled by server. Possible + values include: "ScheduleBased". + :type kind: str or ~azure.mgmt.datashare.models.TriggerKind :ivar created_at: Time at which the trigger was created. :vartype created_at: ~datetime.datetime :ivar provisioning_state: Gets the provisioning state. 
Possible values include: "Succeeded", @@ -2952,6 +3083,7 @@ class ScheduledTrigger(Trigger): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'created_at': {'readonly': True}, @@ -2965,6 +3097,7 @@ class ScheduledTrigger(Trigger): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, @@ -3004,6 +3137,8 @@ class Share(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :ivar created_at: Time at which the share was created. @@ -3026,6 +3161,7 @@ class Share(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'created_at': {'readonly': True}, 'provisioning_state': {'readonly': True}, @@ -3036,6 +3172,7 @@ class Share(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, 'description': {'key': 'properties.description', 'type': 'str'}, @@ -3107,10 +3244,14 @@ class ShareSubscription(ProxyDto): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. 
:vartype type: str :ivar created_at: Time at which the share subscription was created. :vartype created_at: ~datetime.datetime + :param expiration_date: The expiration date of the share subscription. + :type expiration_date: ~datetime.datetime :param invitation_id: Required. The invitation id. :type invitation_id: str :ivar provider_email: Email of the provider who created the resource. @@ -3144,6 +3285,7 @@ class ShareSubscription(ProxyDto): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'created_at': {'readonly': True}, 'invitation_id': {'required': True}, @@ -3164,8 +3306,10 @@ class ShareSubscription(ProxyDto): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'}, + 'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'}, 'invitation_id': {'key': 'properties.invitationId', 'type': 'str'}, 'provider_email': {'key': 'properties.providerEmail', 'type': 'str'}, 'provider_name': {'key': 'properties.providerName', 'type': 'str'}, @@ -3186,10 +3330,12 @@ def __init__( *, invitation_id: str, source_share_location: str, + expiration_date: Optional[datetime.datetime] = None, **kwargs ): super(ShareSubscription, self).__init__(**kwargs) self.created_at = None + self.expiration_date = expiration_date self.invitation_id = invitation_id self.provider_email = None self.provider_name = None @@ -3476,13 +3622,15 @@ class SqlDBTableDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. 
Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :param database_name: Database name of the source data set. :type database_name: str :ivar data_set_id: Unique id for identifying a data set resource. @@ -3498,6 +3646,7 @@ class SqlDBTableDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -3506,6 +3655,7 @@ class SqlDBTableDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'database_name': {'key': 'properties.databaseName', 'type': 'str'}, @@ -3544,13 +3694,15 @@ class SqlDBTableDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". 
+ :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param database_name: Required. DatabaseName name of the sink data set. :type database_name: str :param data_set_id: Required. The id of the source data set. @@ -3572,6 +3724,7 @@ class SqlDBTableDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'database_name': {'required': True}, @@ -3586,6 +3739,7 @@ class SqlDBTableDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'database_name': {'key': 'properties.databaseName', 'type': 'str'}, @@ -3629,13 +3783,15 @@ class SqlDWTableDataSet(DataSet): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", - "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind :ivar data_set_id: Unique id for identifying a data set resource. :vartype data_set_id: str :param data_warehouse_name: DataWarehouse name of the source data set. 
@@ -3651,6 +3807,7 @@ class SqlDWTableDataSet(DataSet): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'readonly': True}, @@ -3659,6 +3816,7 @@ class SqlDWTableDataSet(DataSet): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -3697,13 +3855,15 @@ class SqlDWTableDataSetMapping(DataSetMapping): :vartype id: str :ivar name: Name of the azure resource. :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData :ivar type: Type of the azure resource. :vartype type: str :param kind: Required. Kind of data set mapping.Constant filled by server. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", - "AdlsGen2File", "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", - "SqlDBTable", "SqlDWTable", "ScheduleBased". - :type kind: str or ~azure.mgmt.datashare.models.Kind + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind :param data_set_id: Required. The id of the source data set. :type data_set_id: str :ivar data_set_mapping_status: Gets the status of the data set mapping. 
Possible values @@ -3725,6 +3885,7 @@ class SqlDWTableDataSetMapping(DataSetMapping): _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, + 'system_data': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'data_set_id': {'required': True}, @@ -3739,6 +3900,7 @@ class SqlDWTableDataSetMapping(DataSetMapping): _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'type': {'key': 'type', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, @@ -3771,6 +3933,137 @@ def __init__( self.table_name = table_name +class SynapseWorkspaceSqlPoolTableDataSet(DataSet): + """A Synapse Workspace Sql Pool Table data set. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource id of the azure resource. + :vartype id: str + :ivar name: Name of the azure resource. + :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData + :ivar type: Type of the azure resource. + :vartype type: str + :param kind: Required. Kind of data set.Constant filled by server. Possible values include: + "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", + "AdlsGen1Folder", "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetKind + :ivar data_set_id: Unique id for identifying a data set resource. + :vartype data_set_id: str + :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse + Workspace SQL Pool Table. 
+ :type synapse_workspace_sql_pool_table_resource_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'system_data': {'readonly': True}, + 'type': {'readonly': True}, + 'kind': {'required': True}, + 'data_set_id': {'readonly': True}, + 'synapse_workspace_sql_pool_table_resource_id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, + 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + synapse_workspace_sql_pool_table_resource_id: str, + **kwargs + ): + super(SynapseWorkspaceSqlPoolTableDataSet, self).__init__(**kwargs) + self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str + self.data_set_id = None + self.synapse_workspace_sql_pool_table_resource_id = synapse_workspace_sql_pool_table_resource_id + + +class SynapseWorkspaceSqlPoolTableDataSetMapping(DataSetMapping): + """A Synapse Workspace Sql Pool Table data set mapping. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource id of the azure resource. + :vartype id: str + :ivar name: Name of the azure resource. + :vartype name: str + :ivar system_data: System Data of the Azure resource. + :vartype system_data: ~azure.mgmt.datashare.models.SystemData + :ivar type: Type of the azure resource. + :vartype type: str + :param kind: Required. Kind of data set mapping.Constant filled by server. 
Possible values + include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", + "AdlsGen2File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". + :type kind: str or ~azure.mgmt.datashare.models.DataSetMappingKind + :param data_set_id: Required. The id of the source data set. + :type data_set_id: str + :ivar data_set_mapping_status: Gets the status of the data set mapping. Possible values + include: "Ok", "Broken". + :vartype data_set_mapping_status: str or ~azure.mgmt.datashare.models.DataSetMappingStatus + :ivar provisioning_state: Provisioning state of the data set mapping. Possible values include: + "Succeeded", "Creating", "Deleting", "Moving", "Failed". + :vartype provisioning_state: str or ~azure.mgmt.datashare.models.ProvisioningState + :param synapse_workspace_sql_pool_table_resource_id: Required. Resource id of the Synapse + Workspace SQL Pool Table. + :type synapse_workspace_sql_pool_table_resource_id: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'system_data': {'readonly': True}, + 'type': {'readonly': True}, + 'kind': {'required': True}, + 'data_set_id': {'required': True}, + 'data_set_mapping_status': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'synapse_workspace_sql_pool_table_resource_id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'type': {'key': 'type', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'data_set_id': {'key': 'properties.dataSetId', 'type': 'str'}, + 'data_set_mapping_status': {'key': 'properties.dataSetMappingStatus', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'synapse_workspace_sql_pool_table_resource_id': {'key': 'properties.synapseWorkspaceSqlPoolTableResourceId', 'type': 'str'}, + } 
+ + def __init__( + self, + *, + data_set_id: str, + synapse_workspace_sql_pool_table_resource_id: str, + **kwargs + ): + super(SynapseWorkspaceSqlPoolTableDataSetMapping, self).__init__(**kwargs) + self.kind = 'SynapseWorkspaceSqlPoolTable' # type: str + self.data_set_id = data_set_id + self.data_set_mapping_status = None + self.provisioning_state = None + self.synapse_workspace_sql_pool_table_resource_id = synapse_workspace_sql_pool_table_resource_id + + class SynchronizationDetails(msrest.serialization.Model): """Synchronization details at data set level. @@ -3780,7 +4073,8 @@ class SynchronizationDetails(msrest.serialization.Model): :vartype data_set_id: str :ivar data_set_type: Type of the data set. Possible values include: "Blob", "Container", "BlobFolder", "AdlsGen2FileSystem", "AdlsGen2Folder", "AdlsGen2File", "AdlsGen1Folder", - "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable". + "AdlsGen1File", "KustoCluster", "KustoDatabase", "SqlDBTable", "SqlDWTable", + "SynapseWorkspaceSqlPoolTable". :vartype data_set_type: str or ~azure.mgmt.datashare.models.DataSetType :ivar duration_ms: Duration of data set level copy. :vartype duration_ms: int @@ -3954,6 +4248,54 @@ def __init__( self.synchronization_mode = synchronization_mode + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure.mgmt.datashare.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC). 
+ :type last_modified_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~azure.mgmt.datashare.models.LastModifiedByType + """ + + _attribute_map = { + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + } + + def __init__( + self, + *, + created_at: Optional[datetime.datetime] = None, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "LastModifiedByType"]] = None, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_at = created_at + self.created_by = created_by + self.created_by_type = created_by_type + self.last_modified_at = last_modified_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + + class TriggerList(msrest.serialization.Model): """List response for get triggers. 
diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_accounts_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_accounts_operations.py index 80d15a5a551a..3f997aae5bf9 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_accounts_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_accounts_operations.py @@ -69,7 +69,7 @@ def list_by_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -111,7 +111,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -147,7 +147,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -173,7 +173,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Account', pipeline_response) @@ -197,7 +197,7 @@ def _create_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -228,7 +228,7 @@ def _create_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -263,8 +263,8 @@ def begin_create( :type account: ~azure.mgmt.datashare.models.Account :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either Account or the result of cls(response) @@ -329,7 +329,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -355,7 +355,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -385,8 +385,8 @@ def begin_delete( :type account_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either OperationResponse or the result of cls(response) @@ -466,7 +466,7 @@ def update( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -497,7 +497,7 @@ def update( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Account', pipeline_response) @@ -533,7 +533,7 @@ def list_by_resource_group( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -576,7 +576,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_invitations_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_invitations_operations.py index 927be908db35..9e9d767d6e00 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_invitations_operations.py +++ 
b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_invitations_operations.py @@ -67,7 +67,7 @@ def list_invitations( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -105,7 +105,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -114,7 +114,7 @@ def get_next(next_link=None): return ItemPaged( get_next, extract_data ) - list_invitations.metadata = {'url': '/providers/Microsoft.DataShare/ListInvitations'} # type: ignore + list_invitations.metadata = {'url': '/providers/Microsoft.DataShare/listInvitations'} # type: ignore def get( self, @@ -141,7 +141,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -166,7 +166,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ConsumerInvitation', pipeline_response) @@ -202,7 +202,7 @@ def reject_invitation( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: 
ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -231,7 +231,7 @@ def reject_invitation( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ConsumerInvitation', pipeline_response) @@ -240,4 +240,4 @@ def reject_invitation( return cls(pipeline_response, deserialized, {}) return deserialized - reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/RejectInvitation'} # type: ignore + reject_invitation.metadata = {'url': '/providers/Microsoft.DataShare/locations/{location}/rejectInvitation'} # type: ignore diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_source_data_sets_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_source_data_sets_operations.py index 70df87d3ce7c..317387cf8124 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_source_data_sets_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_consumer_source_data_sets_operations.py @@ -76,7 +76,7 @@ def list_by_share_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -121,7 +121,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = 
self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -130,4 +130,4 @@ def get_next(next_link=None): return ItemPaged( get_next, extract_data ) - list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/ConsumerSourceDataSets'} # type: ignore + list_by_share_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/consumerSourceDataSets'} # type: ignore diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_set_mappings_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_set_mappings_operations.py index 7ae0d1fb2534..7bb25d07dd80 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_set_mappings_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_set_mappings_operations.py @@ -76,7 +76,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -104,7 +104,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) deserialized = self._deserialize('DataSetMapping', pipeline_response) @@ -151,7 +151,7 @@ def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -184,7 +184,7 @@ def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -230,7 +230,7 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -258,7 +258,7 @@ def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -303,7 +303,7 @@ def list_by_share_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -352,7 +352,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + 
error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_sets_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_sets_operations.py index 762e4624e6f8..8f2abfead10c 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_sets_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_data_sets_operations.py @@ -78,7 +78,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -106,7 +106,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('DataSet', pipeline_response) @@ -151,7 +151,7 @@ def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -184,7 +184,7 @@ def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, 
response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -213,7 +213,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -241,7 +241,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -272,8 +272,8 @@ def begin_delete( :type data_set_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -363,7 +363,7 @@ def list_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -412,7 +412,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_invitations_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_invitations_operations.py index 8ff075cdf586..c846516d09df 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_invitations_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_invitations_operations.py @@ -76,7 +76,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -104,7 +104,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Invitation', pipeline_response) @@ -149,7 +149,7 
@@ def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -182,7 +182,7 @@ def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -228,7 +228,7 @@ def delete( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -256,7 +256,7 @@ def delete( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -301,7 +301,7 @@ def list_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -350,7 +350,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, 
response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_operations.py index b6a62de670fb..081c217b7145 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_operations.py @@ -64,7 +64,7 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -100,7 +100,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_provider_share_subscriptions_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_provider_share_subscriptions_operations.py index e7daa7366864..615fb43fea3f 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_provider_share_subscriptions_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_provider_share_subscriptions_operations.py @@ -47,12 +47,91 @@ def __init__(self, client, config, serializer, deserializer): self._deserialize = deserializer self._config = config + def adjust( + self, + resource_group_name, # type: str + account_name, # type: str + share_name, # 
type: str + provider_share_subscription_id, # type: str + provider_share_subscription, # type: "_models.ProviderShareSubscription" + **kwargs # type: Any + ): + # type: (...) -> "_models.ProviderShareSubscription" + """Adjust the expiration date of a share subscription in a provider share. + + Adjust a share subscription's expiration date in a provider share. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param account_name: The name of the share account. + :type account_name: str + :param share_name: The name of the share. + :type share_name: str + :param provider_share_subscription_id: To locate shareSubscription. + :type provider_share_subscription_id: str + :param provider_share_subscription: The provider share subscription. + :type provider_share_subscription: ~azure.mgmt.datashare.models.ProviderShareSubscription + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ProviderShareSubscription, or the result of cls(response) + :rtype: ~azure.mgmt.datashare.models.ProviderShareSubscription + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ProviderShareSubscription"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-09-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.adjust.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'accountName': self._serialize.url("account_name", account_name, 'str'), + 'shareName': self._serialize.url("share_name", share_name, 'str'), + 
'providerShareSubscriptionId': self._serialize.url("provider_share_subscription_id", provider_share_subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ProviderShareSubscription', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + adjust.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/adjust'} # type: ignore + def reinstate( self, resource_group_name, # type: str account_name, # type: str share_name, # type: str provider_share_subscription_id, # type: str + provider_share_subscription, # type: "_models.ProviderShareSubscription" **kwargs # type: Any ): # 
type: (...) -> "_models.ProviderShareSubscription" @@ -68,6 +147,8 @@ def reinstate( :type share_name: str :param provider_share_subscription_id: To locate shareSubscription. :type provider_share_subscription_id: str + :param provider_share_subscription: The provider share subscription. + :type provider_share_subscription: ~azure.mgmt.datashare.models.ProviderShareSubscription :keyword callable cls: A custom type or function that will be passed the direct response :return: ProviderShareSubscription, or the result of cls(response) :rtype: ~azure.mgmt.datashare.models.ProviderShareSubscription @@ -78,7 +159,8 @@ def reinstate( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" + content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL @@ -98,15 +180,19 @@ def reinstate( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - request = self._client.post(url, query_parameters, header_parameters) + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(provider_share_subscription, 'ProviderShareSubscription') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProviderShareSubscription', pipeline_response) @@ -131,7 +217,7 @@ def _revoke_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -159,7 +245,7 @@ def _revoke_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -197,8 +283,8 @@ def begin_revoke( :type provider_share_subscription_id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either ProviderShareSubscription or the result of cls(response) @@ -285,7 +371,7 @@ def get_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -313,7 +399,7 @@ def get_by_share( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ProviderShareSubscription', pipeline_response) @@ -355,7 +441,7 @@ def list_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -400,7 +486,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_share_subscriptions_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_share_subscriptions_operations.py index 2f501937796f..b6326ae3422c 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_share_subscriptions_operations.py +++ 
b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_share_subscriptions_operations.py @@ -61,7 +61,7 @@ def _cancel_synchronization_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -93,7 +93,7 @@ def _cancel_synchronization_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -131,8 +131,8 @@ def begin_cancel_synchronization( :type share_subscription_synchronization: ~azure.mgmt.datashare.models.ShareSubscriptionSynchronization :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response) @@ -218,7 +218,7 @@ def list_source_share_synchronization_settings( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -263,7 +263,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -314,7 +314,7 @@ def list_synchronization_details( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = "application/json" accept = "application/json" @@ -371,7 +371,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -419,7 +419,7 @@ def list_synchronizations( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -468,7 +468,7 @@ def get_next(next_link=None): 
response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -493,7 +493,7 @@ def _synchronize_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -525,7 +525,7 @@ def _synchronize_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -538,7 +538,7 @@ def _synchronize_initial( return cls(pipeline_response, deserialized, {}) return deserialized - _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'} # type: ignore + _synchronize_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore def begin_synchronize( self, @@ -563,8 +563,8 @@ def begin_synchronize( :type synchronize: ~azure.mgmt.datashare.models.Synchronize :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A 
continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either ShareSubscriptionSynchronization or the result of cls(response) @@ -617,7 +617,7 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize'} # type: ignore + begin_synchronize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/synchronize'} # type: ignore def get( self, @@ -647,7 +647,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -674,7 +674,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('ShareSubscription', 
pipeline_response) @@ -716,7 +716,7 @@ def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -748,7 +748,7 @@ def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -776,7 +776,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -803,7 +803,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -836,8 +836,8 @@ def begin_delete( :type share_subscription_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either OperationResponse or the result of cls(response) @@ -925,7 +925,7 @@ def list_by_account( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -973,7 +973,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_shares_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_shares_operations.py index 2dd5e7b11d0e..36e44618e1c9 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_shares_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_shares_operations.py @@ -87,7 +87,7 @@ def list_synchronization_details( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = "application/json" accept = "application/json" @@ -144,7 +144,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = 
self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -192,7 +192,7 @@ def list_synchronizations( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -241,7 +241,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -280,7 +280,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -307,7 +307,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Share', pipeline_response) @@ -349,7 +349,7 @@ def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = 
"application/json" @@ -381,7 +381,7 @@ def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -409,7 +409,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -436,7 +436,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -469,8 +469,8 @@ def begin_delete( :type share_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either OperationResponse or the result of cls(response) @@ -558,7 +558,7 @@ def list_by_account( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -606,7 +606,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_synchronization_settings_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_synchronization_settings_operations.py index 05cd6af360ba..b945483b060c 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_synchronization_settings_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_synchronization_settings_operations.py @@ -78,7 +78,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -106,7 +106,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = 
self._deserialize('SynchronizationSetting', pipeline_response) @@ -129,7 +129,7 @@ def create( # type: (...) -> "_models.SynchronizationSetting" """Adds a new synchronization setting to an existing share. - Create or update a synchronizationSetting. + Create a synchronizationSetting. :param resource_group_name: The resource group name. :type resource_group_name: str @@ -151,7 +151,7 @@ def create( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -184,7 +184,7 @@ def create( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -213,7 +213,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -241,7 +241,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -277,8 +277,8 @@ def begin_delete( :type synchronization_setting_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str 
continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either OperationResponse or the result of cls(response) @@ -365,7 +365,7 @@ def list_by_share( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -410,7 +410,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_triggers_operations.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_triggers_operations.py index 0f4a69f922a4..0acfccffd94f 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_triggers_operations.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/operations/_triggers_operations.py @@ -78,7 +78,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - 
api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -106,7 +106,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Trigger', pipeline_response) @@ -132,7 +132,7 @@ def _create_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -165,7 +165,7 @@ def _create_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -207,8 +207,8 @@ def begin_create( :type trigger: ~azure.mgmt.datashare.models.Trigger :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. 
:paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either Trigger or the result of cls(response) @@ -279,7 +279,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" # Construct URL @@ -307,7 +307,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -343,8 +343,8 @@ def begin_delete( :type trigger_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: Pass in True if you'd like the ARMPolling polling method, + False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either OperationResponse or the result of cls(response) @@ -431,7 +431,7 @@ def list_by_share_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2019-11-01" + api_version = "2020-09-01" accept = "application/json" def prepare_request(next_link=None): @@ -476,7 +476,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.DataShareError, response) + error = self._deserialize.failsafe_deserialize(_models.DataShareError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) From a0501d516e831c8b2794ecfbcfd3949ea81bb914 Mon Sep 17 00:00:00 2001 From: PythonSdkPipelines Date: Mon, 12 Apr 2021 02:05:59 +0000 Subject: [PATCH 2/3] version,CHANGELOG --- .../azure-mgmt-datashare/CHANGELOG.md | 52 +++++++++++++++++++ sdk/datashare/azure-mgmt-datashare/README.md | 2 +- .../azure/mgmt/datashare/_version.py | 2 +- .../azure-mgmt-datashare/sdk_packaging.toml | 2 +- sdk/datashare/azure-mgmt-datashare/setup.py | 4 +- shared_requirements.txt | 1 + 6 files changed, 58 insertions(+), 5 deletions(-) diff --git a/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md b/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md index b1a4dc80c77c..25580ea1cfa4 100644 --- a/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md +++ b/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md @@ -1,5 +1,57 @@ # Release History +## 1.0.0b2 (2021-04-12) + +**Features** + + - Model BlobContainerDataSetMapping has a new parameter system_data + - Model SqlDWTableDataSet has a new parameter system_data + - Model ADLSGen1FolderDataSet has a new parameter system_data + - Model KustoDatabaseDataSetMapping has a new parameter system_data + - Model 
ConsumerSourceDataSet has a new parameter system_data + - Model ScheduledTrigger has a new parameter system_data + - Model DataSet has a new parameter system_data + - Model KustoClusterDataSetMapping has a new parameter system_data + - Model BlobDataSet has a new parameter system_data + - Model SynchronizationSetting has a new parameter system_data + - Model BlobFolderDataSetMapping has a new parameter system_data + - Model OperationMetaMetricSpecification has a new parameter fill_gap_with_zero + - Model ConsumerInvitation has a new parameter expiration_date + - Model ConsumerInvitation has a new parameter system_data + - Model ProviderShareSubscription has a new parameter expiration_date + - Model ProviderShareSubscription has a new parameter system_data + - Model ProxyDto has a new parameter system_data + - Model BlobFolderDataSet has a new parameter system_data + - Model ADLSGen2FolderDataSet has a new parameter system_data + - Model ScheduledSynchronizationSetting has a new parameter system_data + - Model SqlDBTableDataSet has a new parameter system_data + - Model Trigger has a new parameter system_data + - Model ADLSGen2FileDataSetMapping has a new parameter system_data + - Model BlobContainerDataSet has a new parameter system_data + - Model BlobDataSetMapping has a new parameter system_data + - Model DefaultDto has a new parameter system_data + - Model DataSetMapping has a new parameter system_data + - Model KustoDatabaseDataSet has a new parameter system_data + - Model ShareSubscription has a new parameter expiration_date + - Model ShareSubscription has a new parameter system_data + - Model KustoClusterDataSet has a new parameter system_data + - Model SqlDBTableDataSetMapping has a new parameter system_data + - Model SqlDWTableDataSetMapping has a new parameter system_data + - Model Account has a new parameter system_data + - Model Share has a new parameter system_data + - Model Invitation has a new parameter expiration_date + - Model Invitation has a new 
parameter system_data + - Model ADLSGen2FileSystemDataSetMapping has a new parameter system_data + - Model ADLSGen2FolderDataSetMapping has a new parameter system_data + - Model ADLSGen2FileDataSet has a new parameter system_data + - Model ADLSGen1FileDataSet has a new parameter system_data + - Model ADLSGen2FileSystemDataSet has a new parameter system_data + - Added operation ProviderShareSubscriptionsOperations.adjust + +**Breaking changes** + + - Operation ProviderShareSubscriptionsOperations.reinstate has a new signature + ## 1.0.0b1 (2020-12-04) This is beta preview version. diff --git a/sdk/datashare/azure-mgmt-datashare/README.md b/sdk/datashare/azure-mgmt-datashare/README.md index bd66bb7ce937..eef11b7cc000 100644 --- a/sdk/datashare/azure-mgmt-datashare/README.md +++ b/sdk/datashare/azure-mgmt-datashare/README.md @@ -1,6 +1,6 @@ # Microsoft Azure SDK for Python -This is the Microsoft Azure MyService Management Client Library. +This is the Microsoft Azure Datashare Management Client Library. This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py index d089401f1756..dfa6ee022f15 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "2019-11-01T00:00:00.000Z" +VERSION = "1.0.0b2" diff --git a/sdk/datashare/azure-mgmt-datashare/sdk_packaging.toml b/sdk/datashare/azure-mgmt-datashare/sdk_packaging.toml index 53e86ee0c3ac..8c1563f2768b 100644 --- a/sdk/datashare/azure-mgmt-datashare/sdk_packaging.toml +++ b/sdk/datashare/azure-mgmt-datashare/sdk_packaging.toml @@ -1,7 +1,7 @@ [packaging] package_name = "azure-mgmt-datashare" package_nspkg = "azure-mgmt-nspkg" -package_pprint_name = "MyService Management" +package_pprint_name = "Datashare Management" package_doc_id = "" is_stable = false is_arm = true diff --git a/sdk/datashare/azure-mgmt-datashare/setup.py b/sdk/datashare/azure-mgmt-datashare/setup.py index 04329a1af934..0cfd0c57cb7f 100644 --- a/sdk/datashare/azure-mgmt-datashare/setup.py +++ b/sdk/datashare/azure-mgmt-datashare/setup.py @@ -13,7 +13,7 @@ # Change the PACKAGE_NAME only to change folder and different name PACKAGE_NAME = "azure-mgmt-datashare" -PACKAGE_PPRINT_NAME = "MyService Management" +PACKAGE_PPRINT_NAME = "Datashare Management" # a-b-c => a/b/c package_folder_path = PACKAGE_NAME.replace('-', '/') @@ -80,7 +80,7 @@ 'azure.mgmt', ]), install_requires=[ - 'msrest>=0.5.0', + 'msrest>=0.6.21', 'azure-common~=1.1', 'azure-mgmt-core>=1.2.0,<2.0.0' ], diff --git a/shared_requirements.txt b/shared_requirements.txt index e945638b35e2..b3785ec4b43f 100644 --- a/shared_requirements.txt +++ b/shared_requirements.txt @@ -215,3 +215,4 @@ opentelemetry-sdk<2.0.0,>=1.0.0 #override azure-mgmt-storagesync msrest>=0.6.21 #override azure-mgmt-resourcegraph msrest>=0.6.21 #override azure-mgmt-containerservice msrest>=0.6.21 +#override azure-mgmt-datashare msrest>=0.6.21 From c711825f937ee61adb3359559306367e8206933b Mon Sep 17 00:00:00 2001 From: Zed <601306339@qq.com> Date: Mon, 12 Apr 2021 14:10:20 +0800 Subject: [PATCH 3/3] datashare t2 changelog config --- sdk/datashare/azure-mgmt-datashare/CHANGELOG.md | 2 
+- .../azure-mgmt-datashare/azure/mgmt/datashare/_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md b/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md index 25580ea1cfa4..d78d4623683b 100644 --- a/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md +++ b/sdk/datashare/azure-mgmt-datashare/CHANGELOG.md @@ -1,6 +1,6 @@ # Release History -## 1.0.0b2 (2021-04-12) +## 1.0.0 (2021-04-12) **Features** diff --git a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py index dfa6ee022f15..c47f66669f1b 100644 --- a/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py +++ b/sdk/datashare/azure-mgmt-datashare/azure/mgmt/datashare/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0b2" +VERSION = "1.0.0"